Added changes by @knu

Chris Eidhof 9 лет назад
Родитель
Commit
d659152db1
1 изменённый файл: 8 добавлений и 18 удалений
  1. 8 18
      app/models/agents/website_agent.rb

+ 8 - 18
app/models/agents/website_agent.rb

@@ -155,13 +155,11 @@ module Agents
155 155
       return unless in_url.present?
156 156
 
157 157
       Array(in_url).each do |url|
158
-        check_url(url).map do |doc|
159
-          create_event payload: doc
160
-        end
158
+        check_url(url)
161 159
       end
162 160
     end
163 161
 
164
-    def check_url(url)
162
+    def check_url(url, payload = {})
165 163
       log "Fetching #{url}"
166 164
       response = faraday.get(url)
167 165
       raise "Failed: #{response.inspect}" unless response.success?
@@ -174,13 +172,12 @@ module Agents
174 172
         end
175 173
         doc = parse(body)
176 174
 
177
-        results = []
178 175
         if extract_full_json?
179 176
           if store_payload!(previous_payloads(1), doc)
180 177
             log "Storing new result for '#{name}': #{doc.inspect}"
181
-            results << doc
178
+            create_event payload: payload.merge(doc)
182 179
           end
183
-          return results
180
+          return
184 181
         end
185 182
 
186 183
         output =
@@ -211,28 +208,21 @@ module Agents
211 208
 
212 209
           if store_payload!(old_events, result)
213 210
             log "Storing new parsed result for '#{name}': #{result.inspect}"
214
-            results << result
211
+            create_event payload: payload.merge(result)
215 212
           end
216 213
         end
217
-
218
-        results
219 214
       }
220 215
     rescue => e
221 216
       error "Error when fetching url: #{e.message}\n#{e.backtrace.join("\n")}"
222
-      return []
223 217
     end
224 218
 
225 219
     def receive(incoming_events)
226 220
       incoming_events.each do |event|
227 221
         interpolate_with(event) do
228 222
           url_to_scrape = event.payload['url']
229
-          docs = []
230
-          docs = check_url(url_to_scrape) if url_to_scrape =~ /^https?:\/\//i
231
-          docs.each do |doc|
232
-            new_payload = interpolated['mode'].to_s == "merge" ? event.payload.dup : {}
233
-            new_payload.merge! doc
234
-            create_event payload: new_payload
235
-          end
223
+          next unless url_to_scrape =~ /^https?:\/\//i
224
+          check_url(url_to_scrape,
225
+                    interpolated['mode'].to_s == "merge" ? event.payload : {})
236 226
         end
237 227
       end
238 228
     end